Loading libraries¶

In [ ]:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
import tensorflow as tf
import seaborn as sns
sns.set()
from PIL import Image
import cv2
import os
import scipy as sp
import IPython
from tensorflow.keras.utils import to_categorical
from keras.callbacks import EarlyStopping, ReduceLROnPlateau
from tensorflow.keras.optimizers import Adam
from sklearn.metrics import mean_absolute_error, confusion_matrix, classification_report
from tensorflow.keras.layers import Input, Conv2D, BatchNormalization, LeakyReLU, MaxPooling2D, Flatten, Dense, Dropout
from tensorflow.keras.models import Model
from sklearn.model_selection import train_test_split
import tensorflow as tf
from tensorflow import keras
from keras.models import Model
from tensorflow.keras import layers
from tensorflow.keras.layers import Dense, Dropout, Flatten
from tensorflow.keras.layers import Conv2D, MaxPooling2D
from tensorflow.keras.optimizers import SGD
from tensorflow.keras.layers import BatchNormalization
from tensorflow.keras.layers import LeakyReLU
from tensorflow.keras.models import clone_model
from tensorflow.keras.callbacks import ReduceLROnPlateau
from tensorflow.keras.callbacks import EarlyStopping
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.utils import plot_model
from tensorflow.keras import regularizers
from tensorflow.keras import initializers
from tensorflow.keras.models import load_model

Mounting drive

In [ ]:
from google.colab import drive
drive.mount('/content/drive',force_remount=True)
Mounted at /content/drive

Reading masked data¶

In [ ]:
#Loading the mask superimposed UTK dataset
df_final = pd.read_csv('/content/drive/MyDrive/OCCLUDED.csv')
In [ ]:
#Insight to data
df_final.head()
Out[ ]:
age gender race pixels
0 49 1 4 250 237 222 205 185 167 178 151 128 171 137 10...
1 50 0 0 102 85 59 93 75 50 72 54 32 74 53 34 103 72 45...
2 50 0 0 97 62 36 109 72 46 135 102 80 189 165 148 242 ...
3 50 0 0 59 107 131 62 104 122 69 102 112 80 108 112 13...
4 50 0 0 0 9 6 1 5 11 1 1 10 13 10 16 25 24 21 33 26 15...
In [ ]:
#labels of ethncity
df_final=df_final.loc[(df_final.race==0 ) | ( df_final.race==1) | ( df_final.race==2) | ( df_final.race==3) | (df_final.race==4)]
In [ ]:
#0 - represents white, 1 - represents black,2 - represents asian,3 - represents indian and 4 - represents others
df_final.race.value_counts()
Out[ ]:
0    8815
1    4109
3    3468
2    2573
4    1402
Name: race, dtype: int64
In [ ]:
# 0 represents male and 1 represents female
df_final.gender.value_counts()
Out[ ]:
0    10492
1     9875
Name: gender, dtype: int64
In [ ]:
#Getting the number of people belonging to each age
df_final.age.value_counts()
Out[ ]:
26     2095
28      893
35      866
24      833
25      714
       ... 
116       3
101       2
91        2
111       1
68        1
Name: age, Length: 95, dtype: int64
In [ ]:
df_final['pixels'] = df_final['pixels'].apply(lambda x: np.array(x.split(), dtype="float32"))
In [ ]:
# normalizing pixels data
df_final['pixels'] = df_final['pixels'].apply(lambda x: x/255)
df_final = df_final.sample(frac=1).reset_index(drop=True)
In [ ]:
X = np.array(df_final['pixels'].tolist())
df_final
Out[ ]:
age gender race pixels
0 26 1 4 [0.7176471, 0.7372549, 0.7529412, 0.7058824, 0...
1 5 0 3 [0.39215687, 0.3764706, 0.36862746, 0.34509805...
2 21 0 0 [0.49411765, 0.49803922, 0.47843137, 0.5019608...
3 35 0 0 [0.2784314, 0.29803923, 0.30980393, 0.28627452...
4 51 1 1 [0.1764706, 0.16078432, 0.14901961, 0.18039216...
... ... ... ... ...
20362 32 0 0 [0.42352942, 0.41960785, 0.44705883, 0.3960784...
20363 27 1 3 [0.89411765, 0.92941177, 0.9254902, 0.75686276...
20364 32 1 1 [0.3019608, 0.16862746, 0.07058824, 0.39215687...
20365 26 0 1 [0.45882353, 0.4392157, 0.41568628, 0.45490196...
20366 24 0 0 [0.3529412, 0.3019608, 0.29411766, 0.36078432,...

20367 rows × 4 columns

In [ ]:
X = X.reshape(X.shape[0],50,50,3)
In [ ]:
y_new = np.array(df_final[['gender', 'race', 'age']])
print(y_new)
[[ 1  4 26]
 [ 0  3  5]
 [ 0  0 21]
 ...
 [ 1  1 32]
 [ 0  1 26]
 [ 0  0 24]]
In [ ]:
# Splitting the data
# Training-60%, Validation-20%, Testing-20%
X_train, X_test, y_train, y_test = train_test_split(X, y_new, test_size=0.2, random_state=42)
X_train, X_cv, y_train, y_cv = train_test_split(X_train,y_train,test_size = 0.25,train_size =0.75)
In [ ]:
# Segregating the labels into different arrays
y_gender_train = y_train[:,0]
y_gender_test = y_test[:,0]
y_gender_cv = y_cv[:,0]
In [ ]:
y_ethnicity_train = y_train[:,1]
y_ethnicity_test = y_test[:,1]
y_ethnicity_cv = y_cv[:,1]
In [ ]:
y_age_train = y_train[:,2]
y_age_test = y_test[:,2]
y_age_cv = y_cv[:,2]
In [ ]:
len(y_ethnicity_cv)
Out[ ]:
4074
In [ ]:
#Finding the position to slice
eth_train_len = len(y_ethnicity_train)
eth_cv_len=len(y_ethnicity_cv)
y_ethnicity_concat = np.concatenate((y_ethnicity_train,y_ethnicity_cv, y_ethnicity_test))
y_ethnicity_concat = y_ethnicity_concat.astype(np.uint8)
y_ethnicity = to_categorical(y_ethnicity_concat)
In [ ]:
#One hot encoding- For example if a person is of ethnicity 'Indian' - 3, we one hot encode it so that it is saves as [0 0 0 1 0]. The '1' in the third position indicates that the ethnicity of the person in the image is 'Indian'.
y_ethnicity_train = y_ethnicity[:eth_train_len]
y_ethnicity_cv=y_ethnicity[eth_train_len:(eth_train_len+eth_cv_len)]
y_ethnicity_test = y_ethnicity[(eth_train_len+eth_cv_len):]

ALEXNET Model¶

In [ ]:
#MODEL ARCHITECTURE
# Define the input shape for the model
input_shape = (50, 50, 3)

# Define the input layer
inputs = Input(shape=input_shape)
x = inputs

# Architecture of the model
x = Conv2D(96, kernel_size=(11, 11), strides=4, padding='same')(x)
x = BatchNormalization()(x)
x = LeakyReLU(0.3)(x)
x = MaxPooling2D(pool_size=(2, 2), strides=2)(x)

x = Conv2D(256, kernel_size=(5, 5), padding='same')(x)
x = BatchNormalization()(x)
x = LeakyReLU(0.3)(x)
x = MaxPooling2D(pool_size=(2, 2), strides=2)(x)

x = Conv2D(384, kernel_size=(3, 3), padding='same')(x)
x = BatchNormalization()(x)
x = LeakyReLU(0.1)(x)

x = Conv2D(384, kernel_size=(3, 3), padding='same')(x)
x = BatchNormalization()(x)
x = LeakyReLU(0.1)(x)

x = Conv2D(256, kernel_size=(3, 3), padding='same')(x)
x = BatchNormalization()(x)
x = LeakyReLU(0.1)(x)
x = MaxPooling2D(pool_size=(2, 2), strides=2)(x)

x = Flatten()(x)

x1 = Dense(1000, activation='relu')(x)
x1 = Dense(1000, activation='relu')(x1)

x2 = Dense(1000, activation='relu')(x)
x2 = Dense(1000, activation='relu')(x2)

x3 = Dense(4096, activation='relu')(x)
x3 = Dropout(0.5)(x3)
x3 = Dense(4096, activation='relu')(x3)
x3 = Dropout(0.5)(x3)

# Output layers
out_gender = Dense(1, activation='sigmoid', name='gender_out')(x2)
out_ethnicity = Dense(5, activation='softmax', name='ethnicity_out')(x1)
out_age = Dense(1, name='age_out')(x3)

# Create the model
model = Model(inputs=inputs, outputs=[out_gender, out_ethnicity, out_age])

# Compile the model
model.compile(
    optimizer='Adam',
    loss={'gender_out': 'binary_crossentropy',
          'ethnicity_out': 'categorical_crossentropy',
          'age_out': 'mean_squared_error'},
    metrics={'gender_out': 'accuracy',
             'ethnicity_out': 'accuracy',
             'age_out': 'mae'}
)

model.summary()
Model: "model"
__________________________________________________________________________________________________
 Layer (type)                   Output Shape         Param #     Connected to                     
==================================================================================================
 input_1 (InputLayer)           [(None, 50, 50, 3)]  0           []                               
                                                                                                  
 conv2d (Conv2D)                (None, 13, 13, 96)   34944       ['input_1[0][0]']                
                                                                                                  
 batch_normalization (BatchNorm  (None, 13, 13, 96)  384         ['conv2d[0][0]']                 
 alization)                                                                                       
                                                                                                  
 leaky_re_lu (LeakyReLU)        (None, 13, 13, 96)   0           ['batch_normalization[0][0]']    
                                                                                                  
 max_pooling2d (MaxPooling2D)   (None, 6, 6, 96)     0           ['leaky_re_lu[0][0]']            
                                                                                                  
 conv2d_1 (Conv2D)              (None, 6, 6, 256)    614656      ['max_pooling2d[0][0]']          
                                                                                                  
 batch_normalization_1 (BatchNo  (None, 6, 6, 256)   1024        ['conv2d_1[0][0]']               
 rmalization)                                                                                     
                                                                                                  
 leaky_re_lu_1 (LeakyReLU)      (None, 6, 6, 256)    0           ['batch_normalization_1[0][0]']  
                                                                                                  
 max_pooling2d_1 (MaxPooling2D)  (None, 3, 3, 256)   0           ['leaky_re_lu_1[0][0]']          
                                                                                                  
 conv2d_2 (Conv2D)              (None, 3, 3, 384)    885120      ['max_pooling2d_1[0][0]']        
                                                                                                  
 batch_normalization_2 (BatchNo  (None, 3, 3, 384)   1536        ['conv2d_2[0][0]']               
 rmalization)                                                                                     
                                                                                                  
 leaky_re_lu_2 (LeakyReLU)      (None, 3, 3, 384)    0           ['batch_normalization_2[0][0]']  
                                                                                                  
 conv2d_3 (Conv2D)              (None, 3, 3, 384)    1327488     ['leaky_re_lu_2[0][0]']          
                                                                                                  
 batch_normalization_3 (BatchNo  (None, 3, 3, 384)   1536        ['conv2d_3[0][0]']               
 rmalization)                                                                                     
                                                                                                  
 leaky_re_lu_3 (LeakyReLU)      (None, 3, 3, 384)    0           ['batch_normalization_3[0][0]']  
                                                                                                  
 conv2d_4 (Conv2D)              (None, 3, 3, 256)    884992      ['leaky_re_lu_3[0][0]']          
                                                                                                  
 batch_normalization_4 (BatchNo  (None, 3, 3, 256)   1024        ['conv2d_4[0][0]']               
 rmalization)                                                                                     
                                                                                                  
 leaky_re_lu_4 (LeakyReLU)      (None, 3, 3, 256)    0           ['batch_normalization_4[0][0]']  
                                                                                                  
 max_pooling2d_2 (MaxPooling2D)  (None, 1, 1, 256)   0           ['leaky_re_lu_4[0][0]']          
                                                                                                  
 flatten (Flatten)              (None, 256)          0           ['max_pooling2d_2[0][0]']        
                                                                                                  
 dense_4 (Dense)                (None, 4096)         1052672     ['flatten[0][0]']                
                                                                                                  
 dropout (Dropout)              (None, 4096)         0           ['dense_4[0][0]']                
                                                                                                  
 dense_2 (Dense)                (None, 1000)         257000      ['flatten[0][0]']                
                                                                                                  
 dense (Dense)                  (None, 1000)         257000      ['flatten[0][0]']                
                                                                                                  
 dense_5 (Dense)                (None, 4096)         16781312    ['dropout[0][0]']                
                                                                                                  
 dense_3 (Dense)                (None, 1000)         1001000     ['dense_2[0][0]']                
                                                                                                  
 dense_1 (Dense)                (None, 1000)         1001000     ['dense[0][0]']                  
                                                                                                  
 dropout_1 (Dropout)            (None, 4096)         0           ['dense_5[0][0]']                
                                                                                                  
 gender_out (Dense)             (None, 1)            1001        ['dense_3[0][0]']                
                                                                                                  
 ethnicity_out (Dense)          (None, 5)            5005        ['dense_1[0][0]']                
                                                                                                  
 age_out (Dense)                (None, 1)            4097        ['dropout_1[0][0]']              
                                                                                                  
==================================================================================================
Total params: 24,112,791
Trainable params: 24,110,039
Non-trainable params: 2,752
__________________________________________________________________________________________________

TRAINING¶

In [ ]:
##Defining batch size and callbacks
batch_size = 8
epochs = 40
In [ ]:
#Training model
history = model.fit(X_train, {'gender_out': y_gender_train, 'ethnicity_out': y_ethnicity_train, 'age_out': y_age_train},
                         batch_size=batch_size,
                         epochs = epochs, validation_data = (X_cv, [y_gender_cv, y_ethnicity_cv, y_age_cv]),
                         steps_per_epoch=(X_train.shape[0] // batch_size)
                    )
Epoch 1/40
1527/1527 [==============================] - 27s 14ms/step - loss: 295.3499 - gender_out_loss: 0.6633 - ethnicity_out_loss: 1.4293 - age_out_loss: 293.2575 - gender_out_accuracy: 0.6018 - ethnicity_out_accuracy: 0.4305 - age_out_mae: 12.8079 - val_loss: 209.7183 - val_gender_out_loss: 0.6272 - val_ethnicity_out_loss: 1.3928 - val_age_out_loss: 207.6982 - val_gender_out_accuracy: 0.6640 - val_ethnicity_out_accuracy: 0.4323 - val_age_out_mae: 10.8049
Epoch 2/40
1527/1527 [==============================] - 19s 13ms/step - loss: 233.1848 - gender_out_loss: 0.6285 - ethnicity_out_loss: 1.3998 - age_out_loss: 231.1564 - gender_out_accuracy: 0.6633 - ethnicity_out_accuracy: 0.4346 - age_out_mae: 11.3187 - val_loss: 196.2610 - val_gender_out_loss: 0.6090 - val_ethnicity_out_loss: 1.4092 - val_age_out_loss: 194.2428 - val_gender_out_accuracy: 0.6824 - val_ethnicity_out_accuracy: 0.4291 - val_age_out_mae: 10.4370
Epoch 3/40
1527/1527 [==============================] - 19s 13ms/step - loss: 200.4898 - gender_out_loss: 0.6270 - ethnicity_out_loss: 1.4005 - age_out_loss: 198.4626 - gender_out_accuracy: 0.6578 - ethnicity_out_accuracy: 0.4325 - age_out_mae: 10.4976 - val_loss: 180.5791 - val_gender_out_loss: 0.6204 - val_ethnicity_out_loss: 1.4079 - val_age_out_loss: 178.5509 - val_gender_out_accuracy: 0.6649 - val_ethnicity_out_accuracy: 0.4283 - val_age_out_mae: 9.8366
Epoch 4/40
1527/1527 [==============================] - 19s 13ms/step - loss: 181.8649 - gender_out_loss: 0.6293 - ethnicity_out_loss: 1.3983 - age_out_loss: 179.8372 - gender_out_accuracy: 0.6547 - ethnicity_out_accuracy: 0.4318 - age_out_mae: 9.9503 - val_loss: 168.0983 - val_gender_out_loss: 0.6395 - val_ethnicity_out_loss: 1.4008 - val_age_out_loss: 166.0581 - val_gender_out_accuracy: 0.6308 - val_ethnicity_out_accuracy: 0.4291 - val_age_out_mae: 9.6716
Epoch 5/40
1527/1527 [==============================] - 20s 13ms/step - loss: 157.4267 - gender_out_loss: 0.6328 - ethnicity_out_loss: 1.3949 - age_out_loss: 155.3989 - gender_out_accuracy: 0.6472 - ethnicity_out_accuracy: 0.4324 - age_out_mae: 9.1921 - val_loss: 163.2729 - val_gender_out_loss: 0.6304 - val_ethnicity_out_loss: 1.3911 - val_age_out_loss: 161.2514 - val_gender_out_accuracy: 0.6505 - val_ethnicity_out_accuracy: 0.4291 - val_age_out_mae: 9.1020
Epoch 6/40
1527/1527 [==============================] - 20s 13ms/step - loss: 142.2101 - gender_out_loss: 0.6292 - ethnicity_out_loss: 1.3818 - age_out_loss: 140.1989 - gender_out_accuracy: 0.6500 - ethnicity_out_accuracy: 0.4324 - age_out_mae: 8.8071 - val_loss: 156.2839 - val_gender_out_loss: 0.6312 - val_ethnicity_out_loss: 1.3857 - val_age_out_loss: 154.2670 - val_gender_out_accuracy: 0.6500 - val_ethnicity_out_accuracy: 0.4291 - val_age_out_mae: 8.7609
Epoch 7/40
1527/1527 [==============================] - 19s 13ms/step - loss: 127.2817 - gender_out_loss: 0.6323 - ethnicity_out_loss: 1.3783 - age_out_loss: 125.2713 - gender_out_accuracy: 0.6468 - ethnicity_out_accuracy: 0.4317 - age_out_mae: 8.3242 - val_loss: 155.0620 - val_gender_out_loss: 0.6392 - val_ethnicity_out_loss: 1.3722 - val_age_out_loss: 153.0505 - val_gender_out_accuracy: 0.6316 - val_ethnicity_out_accuracy: 0.4342 - val_age_out_mae: 8.8006
Epoch 8/40
1527/1527 [==============================] - 20s 13ms/step - loss: 118.7250 - gender_out_loss: 0.6364 - ethnicity_out_loss: 1.3668 - age_out_loss: 116.7217 - gender_out_accuracy: 0.6394 - ethnicity_out_accuracy: 0.4326 - age_out_mae: 8.0683 - val_loss: 155.2441 - val_gender_out_loss: 0.6306 - val_ethnicity_out_loss: 1.3781 - val_age_out_loss: 153.2355 - val_gender_out_accuracy: 0.6473 - val_ethnicity_out_accuracy: 0.4296 - val_age_out_mae: 9.0962
Epoch 9/40
1527/1527 [==============================] - 19s 13ms/step - loss: 106.6010 - gender_out_loss: 0.6338 - ethnicity_out_loss: 1.3601 - age_out_loss: 104.6071 - gender_out_accuracy: 0.6463 - ethnicity_out_accuracy: 0.4357 - age_out_mae: 7.5996 - val_loss: 150.1214 - val_gender_out_loss: 0.6293 - val_ethnicity_out_loss: 1.3700 - val_age_out_loss: 148.1221 - val_gender_out_accuracy: 0.6487 - val_ethnicity_out_accuracy: 0.4330 - val_age_out_mae: 8.8987
Epoch 10/40
1527/1527 [==============================] - 20s 13ms/step - loss: 95.8984 - gender_out_loss: 0.6344 - ethnicity_out_loss: 1.3582 - age_out_loss: 93.9060 - gender_out_accuracy: 0.6437 - ethnicity_out_accuracy: 0.4360 - age_out_mae: 7.2312 - val_loss: 145.4114 - val_gender_out_loss: 0.6343 - val_ethnicity_out_loss: 1.3533 - val_age_out_loss: 143.4238 - val_gender_out_accuracy: 0.6404 - val_ethnicity_out_accuracy: 0.4364 - val_age_out_mae: 8.4149
Epoch 11/40
1527/1527 [==============================] - 20s 13ms/step - loss: 88.8686 - gender_out_loss: 0.6366 - ethnicity_out_loss: 1.3507 - age_out_loss: 86.8814 - gender_out_accuracy: 0.6407 - ethnicity_out_accuracy: 0.4404 - age_out_mae: 6.9222 - val_loss: 148.3564 - val_gender_out_loss: 0.6353 - val_ethnicity_out_loss: 1.3624 - val_age_out_loss: 146.3587 - val_gender_out_accuracy: 0.6394 - val_ethnicity_out_accuracy: 0.4458 - val_age_out_mae: 8.6353
Epoch 12/40
1527/1527 [==============================] - 20s 13ms/step - loss: 78.8594 - gender_out_loss: 0.6354 - ethnicity_out_loss: 1.3490 - age_out_loss: 76.8750 - gender_out_accuracy: 0.6419 - ethnicity_out_accuracy: 0.4461 - age_out_mae: 6.5382 - val_loss: 143.3298 - val_gender_out_loss: 0.6313 - val_ethnicity_out_loss: 1.3644 - val_age_out_loss: 141.3342 - val_gender_out_accuracy: 0.6438 - val_ethnicity_out_accuracy: 0.4288 - val_age_out_mae: 8.2619
Epoch 13/40
1527/1527 [==============================] - 20s 13ms/step - loss: 72.6836 - gender_out_loss: 0.6365 - ethnicity_out_loss: 1.3483 - age_out_loss: 70.6989 - gender_out_accuracy: 0.6389 - ethnicity_out_accuracy: 0.4441 - age_out_mae: 6.2485 - val_loss: 139.2251 - val_gender_out_loss: 0.6247 - val_ethnicity_out_loss: 1.3702 - val_age_out_loss: 137.2302 - val_gender_out_accuracy: 0.6500 - val_ethnicity_out_accuracy: 0.4313 - val_age_out_mae: 8.1825
Epoch 14/40
1527/1527 [==============================] - 20s 13ms/step - loss: 65.9812 - gender_out_loss: 0.6374 - ethnicity_out_loss: 1.3493 - age_out_loss: 63.9944 - gender_out_accuracy: 0.6316 - ethnicity_out_accuracy: 0.4438 - age_out_mae: 5.9099 - val_loss: 148.1410 - val_gender_out_loss: 0.6350 - val_ethnicity_out_loss: 1.3777 - val_age_out_loss: 146.1284 - val_gender_out_accuracy: 0.6505 - val_ethnicity_out_accuracy: 0.4271 - val_age_out_mae: 8.3935
Epoch 15/40
1527/1527 [==============================] - 20s 13ms/step - loss: 60.6837 - gender_out_loss: 0.6359 - ethnicity_out_loss: 1.3442 - age_out_loss: 58.7036 - gender_out_accuracy: 0.6339 - ethnicity_out_accuracy: 0.4405 - age_out_mae: 5.6938 - val_loss: 149.1410 - val_gender_out_loss: 0.6297 - val_ethnicity_out_loss: 1.3672 - val_age_out_loss: 147.1441 - val_gender_out_accuracy: 0.6465 - val_ethnicity_out_accuracy: 0.4266 - val_age_out_mae: 8.6907
Epoch 16/40
1527/1527 [==============================] - 20s 13ms/step - loss: 54.6484 - gender_out_loss: 0.6309 - ethnicity_out_loss: 1.3415 - age_out_loss: 52.6760 - gender_out_accuracy: 0.6386 - ethnicity_out_accuracy: 0.4445 - age_out_mae: 5.3452 - val_loss: 171.7418 - val_gender_out_loss: 0.6331 - val_ethnicity_out_loss: 1.3648 - val_age_out_loss: 169.7440 - val_gender_out_accuracy: 0.6595 - val_ethnicity_out_accuracy: 0.4185 - val_age_out_mae: 9.0972
Epoch 17/40
1527/1527 [==============================] - 20s 13ms/step - loss: 49.8348 - gender_out_loss: 0.6332 - ethnicity_out_loss: 1.3370 - age_out_loss: 47.8646 - gender_out_accuracy: 0.6395 - ethnicity_out_accuracy: 0.4461 - age_out_mae: 5.1253 - val_loss: 144.1167 - val_gender_out_loss: 0.6268 - val_ethnicity_out_loss: 1.3476 - val_age_out_loss: 142.1423 - val_gender_out_accuracy: 0.6566 - val_ethnicity_out_accuracy: 0.4428 - val_age_out_mae: 8.3237
Epoch 18/40
1527/1527 [==============================] - 20s 13ms/step - loss: 47.7131 - gender_out_loss: 0.6263 - ethnicity_out_loss: 1.3391 - age_out_loss: 45.7477 - gender_out_accuracy: 0.6450 - ethnicity_out_accuracy: 0.4407 - age_out_mae: 4.9745 - val_loss: 144.8043 - val_gender_out_loss: 0.6375 - val_ethnicity_out_loss: 1.3494 - val_age_out_loss: 142.8174 - val_gender_out_accuracy: 0.6372 - val_ethnicity_out_accuracy: 0.4462 - val_age_out_mae: 8.4030
Epoch 19/40
1527/1527 [==============================] - 20s 13ms/step - loss: 44.1370 - gender_out_loss: 0.6257 - ethnicity_out_loss: 1.3352 - age_out_loss: 42.1760 - gender_out_accuracy: 0.6467 - ethnicity_out_accuracy: 0.4439 - age_out_mae: 4.7786 - val_loss: 144.2978 - val_gender_out_loss: 0.6246 - val_ethnicity_out_loss: 1.3534 - val_age_out_loss: 142.3199 - val_gender_out_accuracy: 0.6532 - val_ethnicity_out_accuracy: 0.4406 - val_age_out_mae: 8.3342
Epoch 20/40
1527/1527 [==============================] - 20s 13ms/step - loss: 42.1907 - gender_out_loss: 0.6240 - ethnicity_out_loss: 1.3370 - age_out_loss: 40.2297 - gender_out_accuracy: 0.6479 - ethnicity_out_accuracy: 0.4424 - age_out_mae: 4.7065 - val_loss: 153.1451 - val_gender_out_loss: 0.6391 - val_ethnicity_out_loss: 1.3610 - val_age_out_loss: 151.1450 - val_gender_out_accuracy: 0.6424 - val_ethnicity_out_accuracy: 0.4315 - val_age_out_mae: 8.2645
Epoch 21/40
1527/1527 [==============================] - 20s 13ms/step - loss: 37.8526 - gender_out_loss: 0.6154 - ethnicity_out_loss: 1.3326 - age_out_loss: 35.9045 - gender_out_accuracy: 0.6592 - ethnicity_out_accuracy: 0.4462 - age_out_mae: 4.4329 - val_loss: 157.9320 - val_gender_out_loss: 0.6067 - val_ethnicity_out_loss: 1.3326 - val_age_out_loss: 155.9927 - val_gender_out_accuracy: 0.6713 - val_ethnicity_out_accuracy: 0.4418 - val_age_out_mae: 8.4669
Epoch 22/40
1527/1527 [==============================] - 20s 13ms/step - loss: 35.7740 - gender_out_loss: 0.6018 - ethnicity_out_loss: 1.3306 - age_out_loss: 33.8416 - gender_out_accuracy: 0.6727 - ethnicity_out_accuracy: 0.4495 - age_out_mae: 4.2775 - val_loss: 149.6238 - val_gender_out_loss: 0.5655 - val_ethnicity_out_loss: 1.3457 - val_age_out_loss: 147.7126 - val_gender_out_accuracy: 0.7204 - val_ethnicity_out_accuracy: 0.4394 - val_age_out_mae: 8.2789
Epoch 23/40
1527/1527 [==============================] - 20s 13ms/step - loss: 35.2439 - gender_out_loss: 0.5976 - ethnicity_out_loss: 1.3284 - age_out_loss: 33.3179 - gender_out_accuracy: 0.6816 - ethnicity_out_accuracy: 0.4507 - age_out_mae: 4.2648 - val_loss: 147.6567 - val_gender_out_loss: 0.5898 - val_ethnicity_out_loss: 1.3491 - val_age_out_loss: 145.7177 - val_gender_out_accuracy: 0.6824 - val_ethnicity_out_accuracy: 0.4494 - val_age_out_mae: 8.3363
Epoch 24/40
1527/1527 [==============================] - 20s 13ms/step - loss: 31.8047 - gender_out_loss: 0.5836 - ethnicity_out_loss: 1.3270 - age_out_loss: 29.8939 - gender_out_accuracy: 0.6878 - ethnicity_out_accuracy: 0.4497 - age_out_mae: 4.0161 - val_loss: 149.5672 - val_gender_out_loss: 0.5869 - val_ethnicity_out_loss: 1.3561 - val_age_out_loss: 147.6243 - val_gender_out_accuracy: 0.6959 - val_ethnicity_out_accuracy: 0.4386 - val_age_out_mae: 8.1981
Epoch 25/40
1527/1527 [==============================] - 20s 13ms/step - loss: 33.2218 - gender_out_loss: 0.5769 - ethnicity_out_loss: 1.3275 - age_out_loss: 31.3174 - gender_out_accuracy: 0.6966 - ethnicity_out_accuracy: 0.4474 - age_out_mae: 4.1215 - val_loss: 150.9723 - val_gender_out_loss: 0.6197 - val_ethnicity_out_loss: 1.3626 - val_age_out_loss: 148.9900 - val_gender_out_accuracy: 0.6780 - val_ethnicity_out_accuracy: 0.4357 - val_age_out_mae: 8.3921
Epoch 26/40
1527/1527 [==============================] - 20s 13ms/step - loss: 28.6052 - gender_out_loss: 0.5567 - ethnicity_out_loss: 1.3169 - age_out_loss: 26.7316 - gender_out_accuracy: 0.7166 - ethnicity_out_accuracy: 0.4594 - age_out_mae: 3.8625 - val_loss: 145.0147 - val_gender_out_loss: 0.5493 - val_ethnicity_out_loss: 1.3555 - val_age_out_loss: 143.1100 - val_gender_out_accuracy: 0.7261 - val_ethnicity_out_accuracy: 0.4477 - val_age_out_mae: 8.1930
Epoch 27/40
1527/1527 [==============================] - 20s 13ms/step - loss: 27.2627 - gender_out_loss: 0.5456 - ethnicity_out_loss: 1.3187 - age_out_loss: 25.3984 - gender_out_accuracy: 0.7196 - ethnicity_out_accuracy: 0.4577 - age_out_mae: 3.7482 - val_loss: 149.9479 - val_gender_out_loss: 0.5430 - val_ethnicity_out_loss: 1.3552 - val_age_out_loss: 148.0497 - val_gender_out_accuracy: 0.7234 - val_ethnicity_out_accuracy: 0.4487 - val_age_out_mae: 8.1498
Epoch 28/40
1527/1527 [==============================] - 20s 13ms/step - loss: 27.2961 - gender_out_loss: 0.5239 - ethnicity_out_loss: 1.3109 - age_out_loss: 25.4613 - gender_out_accuracy: 0.7368 - ethnicity_out_accuracy: 0.4617 - age_out_mae: 3.7384 - val_loss: 151.3629 - val_gender_out_loss: 0.5338 - val_ethnicity_out_loss: 1.3410 - val_age_out_loss: 149.4880 - val_gender_out_accuracy: 0.7349 - val_ethnicity_out_accuracy: 0.4529 - val_age_out_mae: 8.3450
Epoch 29/40
1527/1527 [==============================] - 20s 13ms/step - loss: 26.1574 - gender_out_loss: 0.5175 - ethnicity_out_loss: 1.3094 - age_out_loss: 24.3306 - gender_out_accuracy: 0.7483 - ethnicity_out_accuracy: 0.4627 - age_out_mae: 3.6679 - val_loss: 147.6471 - val_gender_out_loss: 0.5124 - val_ethnicity_out_loss: 1.3439 - val_age_out_loss: 145.7910 - val_gender_out_accuracy: 0.7531 - val_ethnicity_out_accuracy: 0.4568 - val_age_out_mae: 8.2573
Epoch 30/40
1527/1527 [==============================] - 20s 13ms/step - loss: 24.7885 - gender_out_loss: 0.5055 - ethnicity_out_loss: 1.3080 - age_out_loss: 22.9750 - gender_out_accuracy: 0.7524 - ethnicity_out_accuracy: 0.4618 - age_out_mae: 3.5581 - val_loss: 140.8479 - val_gender_out_loss: 0.4960 - val_ethnicity_out_loss: 1.3333 - val_age_out_loss: 139.0186 - val_gender_out_accuracy: 0.7658 - val_ethnicity_out_accuracy: 0.4691 - val_age_out_mae: 8.0031
Epoch 31/40
1527/1527 [==============================] - 20s 13ms/step - loss: 24.8664 - gender_out_loss: 0.5005 - ethnicity_out_loss: 1.2993 - age_out_loss: 23.0667 - gender_out_accuracy: 0.7546 - ethnicity_out_accuracy: 0.4671 - age_out_mae: 3.5609 - val_loss: 143.5551 - val_gender_out_loss: 0.5211 - val_ethnicity_out_loss: 1.3321 - val_age_out_loss: 141.7018 - val_gender_out_accuracy: 0.7450 - val_ethnicity_out_accuracy: 0.4553 - val_age_out_mae: 8.2568
Epoch 32/40
1527/1527 [==============================] - 20s 13ms/step - loss: 23.1390 - gender_out_loss: 0.4924 - ethnicity_out_loss: 1.2904 - age_out_loss: 21.3562 - gender_out_accuracy: 0.7623 - ethnicity_out_accuracy: 0.4753 - age_out_mae: 3.4308 - val_loss: 144.4602 - val_gender_out_loss: 0.5204 - val_ethnicity_out_loss: 1.3148 - val_age_out_loss: 142.6250 - val_gender_out_accuracy: 0.7428 - val_ethnicity_out_accuracy: 0.4730 - val_age_out_mae: 7.9984
Epoch 33/40
1527/1527 [==============================] - 20s 13ms/step - loss: 23.4516 - gender_out_loss: 0.4841 - ethnicity_out_loss: 1.2901 - age_out_loss: 21.6775 - gender_out_accuracy: 0.7670 - ethnicity_out_accuracy: 0.4772 - age_out_mae: 3.4489 - val_loss: 146.8009 - val_gender_out_loss: 0.4789 - val_ethnicity_out_loss: 1.3264 - val_age_out_loss: 144.9956 - val_gender_out_accuracy: 0.7678 - val_ethnicity_out_accuracy: 0.4701 - val_age_out_mae: 8.2248
Epoch 34/40
1527/1527 [==============================] - 20s 13ms/step - loss: 21.5313 - gender_out_loss: 0.4692 - ethnicity_out_loss: 1.2851 - age_out_loss: 19.7770 - gender_out_accuracy: 0.7742 - ethnicity_out_accuracy: 0.4799 - age_out_mae: 3.3071 - val_loss: 145.3974 - val_gender_out_loss: 0.4720 - val_ethnicity_out_loss: 1.3064 - val_age_out_loss: 143.6190 - val_gender_out_accuracy: 0.7744 - val_ethnicity_out_accuracy: 0.4848 - val_age_out_mae: 8.0836
Epoch 35/40
1527/1527 [==============================] - 20s 13ms/step - loss: 21.1512 - gender_out_loss: 0.4780 - ethnicity_out_loss: 1.2818 - age_out_loss: 19.3914 - gender_out_accuracy: 0.7721 - ethnicity_out_accuracy: 0.4837 - age_out_mae: 3.2655 - val_loss: 148.3691 - val_gender_out_loss: 0.4780 - val_ethnicity_out_loss: 1.3099 - val_age_out_loss: 146.5814 - val_gender_out_accuracy: 0.7710 - val_ethnicity_out_accuracy: 0.4779 - val_age_out_mae: 8.1705
Epoch 36/40
1527/1527 [==============================] - 20s 13ms/step - loss: 21.0286 - gender_out_loss: 0.4744 - ethnicity_out_loss: 1.2839 - age_out_loss: 19.2704 - gender_out_accuracy: 0.7732 - ethnicity_out_accuracy: 0.4882 - age_out_mae: 3.2629 - val_loss: 145.8957 - val_gender_out_loss: 0.4676 - val_ethnicity_out_loss: 1.3062 - val_age_out_loss: 144.1219 - val_gender_out_accuracy: 0.7793 - val_ethnicity_out_accuracy: 0.4809 - val_age_out_mae: 8.0183
Epoch 37/40
1527/1527 [==============================] - 20s 13ms/step - loss: 19.1528 - gender_out_loss: 0.4530 - ethnicity_out_loss: 1.2734 - age_out_loss: 17.4264 - gender_out_accuracy: 0.7834 - ethnicity_out_accuracy: 0.4904 - age_out_mae: 3.1171 - val_loss: 154.3310 - val_gender_out_loss: 0.4453 - val_ethnicity_out_loss: 1.3050 - val_age_out_loss: 152.5806 - val_gender_out_accuracy: 0.7958 - val_ethnicity_out_accuracy: 0.4833 - val_age_out_mae: 8.2663
Epoch 38/40
1527/1527 [==============================] - 20s 13ms/step - loss: 20.2817 - gender_out_loss: 0.4516 - ethnicity_out_loss: 1.2681 - age_out_loss: 18.5621 - gender_out_accuracy: 0.7894 - ethnicity_out_accuracy: 0.4995 - age_out_mae: 3.1813 - val_loss: 148.1242 - val_gender_out_loss: 0.4600 - val_ethnicity_out_loss: 1.3074 - val_age_out_loss: 146.3568 - val_gender_out_accuracy: 0.7830 - val_ethnicity_out_accuracy: 0.4848 - val_age_out_mae: 8.3404
Epoch 39/40
1527/1527 [==============================] - 20s 13ms/step - loss: 19.8348 - gender_out_loss: 0.4452 - ethnicity_out_loss: 1.2631 - age_out_loss: 18.1265 - gender_out_accuracy: 0.7924 - ethnicity_out_accuracy: 0.4955 - age_out_mae: 3.1208 - val_loss: 144.9969 - val_gender_out_loss: 0.4730 - val_ethnicity_out_loss: 1.2812 - val_age_out_loss: 143.2426 - val_gender_out_accuracy: 0.7813 - val_ethnicity_out_accuracy: 0.4968 - val_age_out_mae: 8.0242
Epoch 40/40
1527/1527 [==============================] - 20s 13ms/step - loss: 18.4871 - gender_out_loss: 0.4414 - ethnicity_out_loss: 1.2607 - age_out_loss: 16.7850 - gender_out_accuracy: 0.7938 - ethnicity_out_accuracy: 0.4993 - age_out_mae: 3.0299 - val_loss: 146.0480 - val_gender_out_loss: 0.4521 - val_ethnicity_out_loss: 1.2942 - val_age_out_loss: 144.3016 - val_gender_out_accuracy: 0.7948 - val_ethnicity_out_accuracy: 0.4828 - val_age_out_mae: 8.1253
In [ ]:
model.save('alexnet_train.h5')

RETRAIN¶

In [ ]:
model1 = load_model('alexnet_train.h5')
In [ ]:
##Defining batch size and callbacks
batch_size = 8
epochs = 5
In [ ]:
#Training model
history = model1.fit(X_train, {'gender_out': y_gender_train, 'ethnicity_out': y_ethnicity_train, 'age_out': y_age_train},
                         batch_size=batch_size,
                         epochs = epochs, validation_data = (X_cv, [y_gender_cv, y_ethnicity_cv, y_age_cv]),
                         steps_per_epoch=(X_train.shape[0] // batch_size)
                         )
Epoch 1/5
1527/1527 [==============================] - 25s 13ms/step - loss: 21.3259 - gender_out_loss: 0.4459 - ethnicity_out_loss: 1.2651 - age_out_loss: 19.6149 - gender_out_accuracy: 0.7908 - ethnicity_out_accuracy: 0.5007 - age_out_mae: 3.2101 - val_loss: 161.3453 - val_gender_out_loss: 0.4758 - val_ethnicity_out_loss: 1.3076 - val_age_out_loss: 159.5620 - val_gender_out_accuracy: 0.7737 - val_ethnicity_out_accuracy: 0.4786 - val_age_out_mae: 8.7527
Epoch 2/5
1527/1527 [==============================] - 20s 13ms/step - loss: 16.1334 - gender_out_loss: 0.4316 - ethnicity_out_loss: 1.2454 - age_out_loss: 14.4564 - gender_out_accuracy: 0.7997 - ethnicity_out_accuracy: 0.5091 - age_out_mae: 2.8209 - val_loss: 146.3229 - val_gender_out_loss: 0.4435 - val_ethnicity_out_loss: 1.2456 - val_age_out_loss: 144.6339 - val_gender_out_accuracy: 0.7980 - val_ethnicity_out_accuracy: 0.5096 - val_age_out_mae: 7.9836
Epoch 3/5
1527/1527 [==============================] - 19s 13ms/step - loss: 16.9183 - gender_out_loss: 0.4284 - ethnicity_out_loss: 1.2402 - age_out_loss: 15.2497 - gender_out_accuracy: 0.8013 - ethnicity_out_accuracy: 0.5138 - age_out_mae: 2.8696 - val_loss: 145.5939 - val_gender_out_loss: 0.4400 - val_ethnicity_out_loss: 1.2606 - val_age_out_loss: 143.8933 - val_gender_out_accuracy: 0.7926 - val_ethnicity_out_accuracy: 0.5118 - val_age_out_mae: 7.9245
Epoch 4/5
1527/1527 [==============================] - 19s 13ms/step - loss: 17.9682 - gender_out_loss: 0.4288 - ethnicity_out_loss: 1.2413 - age_out_loss: 16.2981 - gender_out_accuracy: 0.7988 - ethnicity_out_accuracy: 0.5111 - age_out_mae: 2.9684 - val_loss: 143.2758 - val_gender_out_loss: 0.4257 - val_ethnicity_out_loss: 1.2426 - val_age_out_loss: 141.6077 - val_gender_out_accuracy: 0.8066 - val_ethnicity_out_accuracy: 0.5167 - val_age_out_mae: 7.8733
Epoch 5/5
1527/1527 [==============================] - 19s 13ms/step - loss: 17.7094 - gender_out_loss: 0.4191 - ethnicity_out_loss: 1.2332 - age_out_loss: 16.0570 - gender_out_accuracy: 0.8074 - ethnicity_out_accuracy: 0.5179 - age_out_mae: 2.9312 - val_loss: 148.9784 - val_gender_out_loss: 0.4456 - val_ethnicity_out_loss: 1.2621 - val_age_out_loss: 147.2708 - val_gender_out_accuracy: 0.7955 - val_ethnicity_out_accuracy: 0.5093 - val_age_out_mae: 8.0724
In [ ]:
model1.save('alexnet_retrain.h5')
In [ ]:
model2 = load_model('alexnet_retrain.h5')

EVALUATION¶

In [ ]:
pred = model2.predict(X_test)

#Accuracy in gender prediction
#Accuracy in ethnicity prediction
#Age mae in age prediction
test_loss,test_gender_loss, test_ethnicity_loss, test_age_loss, test_gender_acc,test_ethnicity_acc,test_age_mae = model2.evaluate(X_test, 
                                                                                        [y_gender_test, y_ethnicity_test, y_age_test], verbose=0)
print(f'\nTest gender accuracy: {test_gender_acc}')
print(f'\nTest ethnicity accuracy: {test_ethnicity_acc}')
print(f'\nTest age MAE: {test_age_mae}')
128/128 [==============================] - 1s 4ms/step

Test gender accuracy: 0.7943053245544434

Test ethnicity accuracy: 0.5044182538986206

Test age MAE: 8.20291519165039

ACTIVATION MAP GENERATION¶

In [ ]:
gender_weights = model2.layers[-3].get_weights()[0]
ethnicity_weights = model2.layers[-2].get_weights()[0]
age_weights = model2.layers[-1].get_weights()[0]

WE USE THE LAST CONVOLUTIONAL LAYER AND THE RESPECTIVE OUTPUT LAYERS OF THE MODEL FOR EACH ATTRIBUTE TO GENERATE AN ACTIVATION MAP

In [ ]:
gender_model  = Model(inputs=model2.input,outputs=(model2.layers[-15].output,model2.layers[-3].output))
ethnicity_model = Model(inputs=model2.input,outputs=(model2.layers[-15].output,model2.layers[-2].output))
age_model = Model(inputs=model2.input,outputs=(model2.layers[-15].output,model2.layers[-1].output))
In [ ]:
features_gender, results_gender = gender_model.predict(X_test)
features_ethnicity, results_ethnicity = ethnicity_model.predict(X_test)
features_age, results_age = age_model.predict(X_test)
128/128 [==============================] - 1s 3ms/step
128/128 [==============================] - 0s 2ms/step
128/128 [==============================] - 0s 2ms/step
In [ ]:
##CLASS ACTIVATION MAP

def activation_map(features, weights, results,x,y,z,att):
  
  for idx in range(x,y):

        features_for_one_img = features[idx,:,:,:]
        height_roomout = X_train.shape[1]/features_for_one_img.shape[0]
        width_roomout  = X_train.shape[2]/features_for_one_img.shape[1]
    
        cam_features = sp.ndimage.zoom(features_for_one_img, (height_roomout, width_roomout, z),order = 1)
        pred = np.argmax(results[idx])
        plt.figure(facecolor='white')
        cam_weights = weights[:,pred]
        cam_output  = np.dot(cam_features,cam_weights)
        fig, axs = plt.subplots(1, 2, figsize=(4, 4))
        plt.grid(False)
        plt.xticks([])
        plt.yticks([])
        if att=='eth':
          buf = 'Predicted Class = ' +str(y_test[idx][1])
        elif att=='gen':
          buf = 'Predicted Class = ' +str(y_test[idx][0]) 
        if att=='age':
          buf = 'Predicted Class = ' +str(y_test[idx][2])         
        # plot original image
        plt.xlabel(buf)
        axs[0].imshow(X_test[idx], alpha=0.5)
        axs[0].set_xlabel("Original Image")
        axs[0].grid(False)
        axs[0].set_axis_off()

        # plot activation map
        axs[1].imshow(np.squeeze(X_test[idx]), alpha=0.7)
        axs[1].imshow(cam_output, cmap='jet', alpha=0.5)
        axs[1].set_title("Class Activation Map")
        axs[1].grid(False)
        axs[0].set_axis_off()
        axs[1].tick_params(axis='both', which='both', length=0)  # Remove ticks

        plt.show()

        axs[0].tick_params(axis='both', which='both', length=0)  # Remove ticks

        # plot activation map
        axs[1].imshow(np.squeeze(X_test[idx]), alpha=0.7)
        axs[1].imshow(cam_output, cmap='jet', alpha=0.5)
        axs[1].set_title("Class Activation Map")
        axs[1].grid(False)
        axs[0].set_axis_off()
        axs[1].tick_params(axis='both', which='both', length=0)  # Remove ticks

        plt.show()
In [ ]:
#activation map to depict how the CNN learns to detect ethnicity
activation_map(features_ethnicity, ethnicity_weights, results_ethnicity,82,93,3.906,'eth')
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
In [ ]:
#activation map to depict how the CNN learns to detect gender
activation_map(features_gender, gender_weights, results_gender,17,32,3.906,'gen')
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
In [ ]:
#activation map to depict how the CNN learns to detect age
activation_map(features_age, age_weights, results_age,45,56,16,'age')
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>
<Figure size 640x480 with 0 Axes>

MODEL ARCHITECTURE¶

In [ ]:
tf.keras.utils.plot_model(
    model1, to_file='alexnet_model.png', show_shapes=True, show_layer_names=True,
    rankdir='TB', expand_nested=False, dpi=96
)
Out[ ]:

Making predictions on test data¶

In [ ]:
#SHOWING THE RESULTS OF A SUBSET OF TEST DATA
for i in range(20, 55):
    plt.figure(figsize=(2, 2))
    gt_age = y_test[i][2]
    gt_ethnicity = y_test[i][1]
    gt_gender = y_test[i][0]
    print("GROUND TRUTH:")
    print("Gender:", gt_gender)
    print("Ethnicity:", gt_ethnicity)
    print("Age:", gt_age)
    plt.imshow(X_test[i], interpolation='nearest')
    plt.grid(False)
    plt.xticks([])
    plt.yticks([])
    plt.show()   
    pred_gender, pred_ethnicity, pred_age = model2.predict(X_test[i][np.newaxis])
    gender_acc = np.round(pred_gender)
    ethnicity_acc = np.mean(np.argmax(pred_ethnicity))
    age_mae = np.abs(pred_age)
    print("\nPREDICTED:")
    print("Gender:", gender_acc)
    print("Ethnicity:", ethnicity_acc)
    print("Age:", age_mae)    
    print("\n------------------------------------------------\n")
    
GROUND TRUTH:
Gender: 1
Ethnicity: 1
Age: 22
1/1 [==============================] - 0s 26ms/step

PREDICTED:
Gender: [[1.]]
Ethnicity: 3.0
Age: [[24.944618]]

------------------------------------------------

GROUND TRUTH:
Gender: 0
Ethnicity: 1
Age: 32
1/1 [==============================] - 0s 28ms/step

PREDICTED:
Gender: [[0.]]
Ethnicity: 0.0
Age: [[47.633713]]

------------------------------------------------

GROUND TRUTH:
Gender: 0
Ethnicity: 0
Age: 25
1/1 [==============================] - 0s 22ms/step

PREDICTED:
Gender: [[0.]]
Ethnicity: 3.0
Age: [[40.86227]]

------------------------------------------------

GROUND TRUTH:
Gender: 1
Ethnicity: 2
Age: 4
1/1 [==============================] - 0s 26ms/step

PREDICTED:
Gender: [[1.]]
Ethnicity: 2.0
Age: [[4.6020846]]

------------------------------------------------

GROUND TRUTH:
Gender: 0
Ethnicity: 0
Age: 47
1/1 [==============================] - 0s 19ms/step

PREDICTED:
Gender: [[0.]]
Ethnicity: 0.0
Age: [[42.339268]]

------------------------------------------------

GROUND TRUTH:
Gender: 0
Ethnicity: 0
Age: 50
1/1 [==============================] - 0s 20ms/step

PREDICTED:
Gender: [[0.]]
Ethnicity: 0.0
Age: [[34.999382]]

------------------------------------------------

GROUND TRUTH:
Gender: 0
Ethnicity: 3
Age: 24
1/1 [==============================] - 0s 19ms/step

PREDICTED:
Gender: [[0.]]
Ethnicity: 3.0
Age: [[24.649845]]

------------------------------------------------

GROUND TRUTH:
Gender: 1
Ethnicity: 0
Age: 35
1/1 [==============================] - 0s 19ms/step

PREDICTED:
Gender: [[1.]]
Ethnicity: 0.0
Age: [[40.04907]]

------------------------------------------------

GROUND TRUTH:
Gender: 0
Ethnicity: 0
Age: 52
1/1 [==============================] - 0s 21ms/step

PREDICTED:
Gender: [[0.]]
Ethnicity: 0.0
Age: [[35.629993]]

------------------------------------------------

GROUND TRUTH:
Gender: 1
Ethnicity: 0
Age: 26
1/1 [==============================] - 0s 19ms/step

PREDICTED:
Gender: [[1.]]
Ethnicity: 1.0
Age: [[30.24192]]

------------------------------------------------

GROUND TRUTH:
Gender: 1
Ethnicity: 0
Age: 18
1/1 [==============================] - 0s 19ms/step

PREDICTED:
Gender: [[1.]]
Ethnicity: 3.0
Age: [[24.299454]]

------------------------------------------------

GROUND TRUTH:
Gender: 0
Ethnicity: 0
Age: 46
1/1 [==============================] - 0s 22ms/step

PREDICTED:
Gender: [[0.]]
Ethnicity: 0.0
Age: [[26.458572]]

------------------------------------------------

GROUND TRUTH:
Gender: 0
Ethnicity: 0
Age: 22
1/1 [==============================] - 0s 20ms/step

PREDICTED:
Gender: [[0.]]
Ethnicity: 0.0
Age: [[24.102188]]

------------------------------------------------

GROUND TRUTH:
Gender: 0
Ethnicity: 0
Age: 58
1/1 [==============================] - 0s 18ms/step

PREDICTED:
Gender: [[0.]]
Ethnicity: 0.0
Age: [[52.584415]]

------------------------------------------------

GROUND TRUTH:
Gender: 1
Ethnicity: 3
Age: 25
1/1 [==============================] - 0s 19ms/step

PREDICTED:
Gender: [[1.]]
Ethnicity: 3.0
Age: [[31.268646]]

------------------------------------------------

GROUND TRUTH:
Gender: 0
Ethnicity: 1
Age: 52
1/1 [==============================] - 0s 19ms/step

PREDICTED:
Gender: [[0.]]
Ethnicity: 0.0
Age: [[48.987114]]

------------------------------------------------

GROUND TRUTH:
Gender: 1
Ethnicity: 0
Age: 48
1/1 [==============================] - 0s 18ms/step

PREDICTED:
Gender: [[1.]]
Ethnicity: 0.0
Age: [[50.633057]]

------------------------------------------------

GROUND TRUTH:
Gender: 1
Ethnicity: 3
Age: 30
1/1 [==============================] - 0s 23ms/step

PREDICTED:
Gender: [[1.]]
Ethnicity: 0.0
Age: [[42.821606]]

------------------------------------------------

GROUND TRUTH:
Gender: 1
Ethnicity: 0
Age: 7
1/1 [==============================] - 0s 20ms/step

PREDICTED:
Gender: [[1.]]
Ethnicity: 0.0
Age: [[7.338527]]

------------------------------------------------

GROUND TRUTH:
Gender: 0
Ethnicity: 1
Age: 34
1/1 [==============================] - 0s 20ms/step

PREDICTED:
Gender: [[0.]]
Ethnicity: 0.0
Age: [[37.80028]]

------------------------------------------------

GROUND TRUTH:
Gender: 0
Ethnicity: 0
Age: 28
1/1 [==============================] - 0s 25ms/step

PREDICTED:
Gender: [[0.]]
Ethnicity: 0.0
Age: [[33.57394]]

------------------------------------------------

GROUND TRUTH:
Gender: 1
Ethnicity: 3
Age: 26
1/1 [==============================] - 0s 26ms/step

PREDICTED:
Gender: [[1.]]
Ethnicity: 3.0
Age: [[24.257671]]

------------------------------------------------

GROUND TRUTH:
Gender: 0
Ethnicity: 0
Age: 14
1/1 [==============================] - 0s 32ms/step

PREDICTED:
Gender: [[1.]]
Ethnicity: 0.0
Age: [[33.908077]]

------------------------------------------------

GROUND TRUTH:
Gender: 1
Ethnicity: 3
Age: 28
1/1 [==============================] - 0s 28ms/step

PREDICTED:
Gender: [[0.]]
Ethnicity: 3.0
Age: [[27.303455]]

------------------------------------------------

GROUND TRUTH:
Gender: 1
Ethnicity: 0
Age: 4
1/1 [==============================] - 0s 30ms/step

PREDICTED:
Gender: [[1.]]
Ethnicity: 0.0
Age: [[6.5066833]]

------------------------------------------------

GROUND TRUTH:
Gender: 0
Ethnicity: 1
Age: 46
1/1 [==============================] - 0s 26ms/step

PREDICTED:
Gender: [[0.]]
Ethnicity: 1.0
Age: [[36.878963]]

------------------------------------------------

GROUND TRUTH:
Gender: 1
Ethnicity: 2
Age: 23
1/1 [==============================] - 0s 29ms/step

PREDICTED:
Gender: [[1.]]
Ethnicity: 0.0
Age: [[25.879627]]

------------------------------------------------

GROUND TRUTH:
Gender: 1
Ethnicity: 4
Age: 26
1/1 [==============================] - 0s 33ms/step

PREDICTED:
Gender: [[1.]]
Ethnicity: 3.0
Age: [[25.635014]]

------------------------------------------------

GROUND TRUTH:
Gender: 1
Ethnicity: 1
Age: 22
1/1 [==============================] - 0s 32ms/step

PREDICTED:
Gender: [[1.]]
Ethnicity: 1.0
Age: [[26.116226]]

------------------------------------------------

GROUND TRUTH:
Gender: 0
Ethnicity: 4
Age: 75
1/1 [==============================] - 0s 30ms/step

PREDICTED:
Gender: [[0.]]
Ethnicity: 0.0
Age: [[59.60473]]

------------------------------------------------

GROUND TRUTH:
Gender: 1
Ethnicity: 0
Age: 53
1/1 [==============================] - 0s 35ms/step

PREDICTED:
Gender: [[0.]]
Ethnicity: 1.0
Age: [[36.723083]]

------------------------------------------------

GROUND TRUTH:
Gender: 0
Ethnicity: 3
Age: 47
1/1 [==============================] - 0s 26ms/step

PREDICTED:
Gender: [[0.]]
Ethnicity: 1.0
Age: [[44.41739]]

------------------------------------------------

GROUND TRUTH:
Gender: 0
Ethnicity: 0
Age: 35
1/1 [==============================] - 0s 30ms/step

PREDICTED:
Gender: [[0.]]
Ethnicity: 0.0
Age: [[33.82643]]

------------------------------------------------

GROUND TRUTH:
Gender: 1
Ethnicity: 2
Age: 22
1/1 [==============================] - 0s 32ms/step

PREDICTED:
Gender: [[1.]]
Ethnicity: 3.0
Age: [[23.721352]]

------------------------------------------------

GROUND TRUTH:
Gender: 0
Ethnicity: 0
Age: 30
1/1 [==============================] - 0s 25ms/step

PREDICTED:
Gender: [[0.]]
Ethnicity: 0.0
Age: [[48.286003]]

------------------------------------------------

TEST ON NON MASKED IMAGES¶

In [ ]:
i = 1
for filename in os.listdir('/content/drive/MyDrive/new/'):
      img = cv2.imread('/content/drive/MyDrive/new/'+ str(filename))
      img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
      age,gender,ethnicity = filename.split("_")[:3]
      print(i,".","GROUND TRUTH:")
      print("gender :", gender)
      print("ethnicity:", ethnicity)
      print("age:", age,"\n")
      # mg = np.mean(color_img, axis=2)
      resized_img = cv2.resize(img, (50, 50), interpolation=cv2.INTER_LINEAR)
      plt.figure(figsize=(2, 2))
      plt.imshow(img, interpolation='nearest')
      plt.grid(False)
      plt.xticks([])
      plt.yticks([])
      plt.show()
      flattened_img = resized_img.flatten()
      normalized_img = (flattened_img - flattened_img.min()) / (flattened_img.max() - flattened_img.min())
      normalized_img = normalized_img.reshape(1,50,50,3)
      x = normalized_img[0]
      pred_gender, pred_ethnicity, pred_age = model2.predict(x[np.newaxis])
      gender_acc = np.round(pred_gender)
      ethnicity_acc = np.mean(np.argmax(pred_ethnicity))
      age_mae = np.abs(pred_age)
      i = i+1
      print("PREDICTED:")
      print("gender :", gender_acc[0])
      print("ethnicity:", ethnicity_acc)
      print("age:", age_mae[0])
      print("---------------------------------------\n")
      
      
1 . GROUND TRUTH:
gender : 1
ethnicity: 3
age: 26 

1/1 [==============================] - 0s 27ms/step
PREDICTED:
gender : [1.]
ethnicity: 3.0
age: [25.231037]
---------------------------------------

2 . GROUND TRUTH:
gender : 1
ethnicity: 4
age: 26 

1/1 [==============================] - 0s 26ms/step
PREDICTED:
gender : [1.]
ethnicity: 3.0
age: [26.269806]
---------------------------------------

3 . GROUND TRUTH:
gender : 0
ethnicity: 3
age: 27 

1/1 [==============================] - 0s 34ms/step
PREDICTED:
gender : [0.]
ethnicity: 3.0
age: [28.934586]
---------------------------------------

4 . GROUND TRUTH:
gender : 1
ethnicity: 3
age: 27 

1/1 [==============================] - 0s 28ms/step
PREDICTED:
gender : [1.]
ethnicity: 3.0
age: [40.858856]
---------------------------------------

5 . GROUND TRUTH:
gender : 0
ethnicity: 0
age: 37 

1/1 [==============================] - 0s 29ms/step
PREDICTED:
gender : [0.]
ethnicity: 0.0
age: [41.992577]
---------------------------------------

6 . GROUND TRUTH:
gender : 0
ethnicity: 1
age: 42 

1/1 [==============================] - 0s 29ms/step
PREDICTED:
gender : [0.]
ethnicity: 0.0
age: [49.404766]
---------------------------------------

7 . GROUND TRUTH:
gender : 0
ethnicity: 2
age: 42 

1/1 [==============================] - 0s 31ms/step
PREDICTED:
gender : [0.]
ethnicity: 0.0
age: [44.345478]
---------------------------------------

8 . GROUND TRUTH:
gender : 1
ethnicity: 1
age: 44 

1/1 [==============================] - 0s 26ms/step
PREDICTED:
gender : [1.]
ethnicity: 0.0
age: [43.77928]
---------------------------------------

9 . GROUND TRUTH:
gender : 0
ethnicity: 1
age: 45 

1/1 [==============================] - 0s 18ms/step
PREDICTED:
gender : [0.]
ethnicity: 0.0
age: [42.42116]
---------------------------------------

10 . GROUND TRUTH:
gender : 0
ethnicity: 3
age: 45 

1/1 [==============================] - 0s 25ms/step
PREDICTED:
gender : [0.]
ethnicity: 3.0
age: [39.699314]
---------------------------------------

11 . GROUND TRUTH:
gender : 1
ethnicity: 0
age: 45 

1/1 [==============================] - 0s 26ms/step
PREDICTED:
gender : [1.]
ethnicity: 0.0
age: [36.327225]
---------------------------------------

12 . GROUND TRUTH:
gender : 0
ethnicity: 1
age: 46 

1/1 [==============================] - 0s 22ms/step
PREDICTED:
gender : [0.]
ethnicity: 0.0
age: [34.375248]
---------------------------------------

13 . GROUND TRUTH:
gender : 1
ethnicity: 0
age: 53 

1/1 [==============================] - 0s 27ms/step
PREDICTED:
gender : [1.]
ethnicity: 1.0
age: [43.914906]
---------------------------------------

14 . GROUND TRUTH:
gender : 0
ethnicity: 0
age: 55 

1/1 [==============================] - 0s 20ms/step
PREDICTED:
gender : [0.]
ethnicity: 0.0
age: [48.844723]
---------------------------------------

15 . GROUND TRUTH:
gender : 1
ethnicity: 2
age: 6 

1/1 [==============================] - 0s 25ms/step
PREDICTED:
gender : [1.]
ethnicity: 0.0
age: [5.158304]
---------------------------------------

16 . GROUND TRUTH:
gender : 1
ethnicity: 1
age: 70 

1/1 [==============================] - 0s 19ms/step
PREDICTED:
gender : [0.]
ethnicity: 0.0
age: [53.067448]
---------------------------------------

17 . GROUND TRUTH:
gender : 0
ethnicity: 1
age: 75 

1/1 [==============================] - 0s 27ms/step
PREDICTED:
gender : [0.]
ethnicity: 0.0
age: [66.880165]
---------------------------------------

18 . GROUND TRUTH:
gender : 0
ethnicity: 3
age: 75 

1/1 [==============================] - 0s 25ms/step
PREDICTED:
gender : [0.]
ethnicity: 0.0
age: [57.084934]
---------------------------------------

19 . GROUND TRUTH:
gender : 0
ethnicity: 0
age: 80 

1/1 [==============================] - 0s 26ms/step
PREDICTED:
gender : [0.]
ethnicity: 0.0
age: [73.7093]
---------------------------------------

Testing on Real-time occluded facial images - ROF dataset¶

To evaluate the model’s performance on facial images that do not have superimposed occlusions, we used a subset of the “Real World Occluded Faces (ROF)” dataset.

In [ ]:
for filename in os.listdir('/content/drive/MyDrive/masked/'):
  print("\n",filename,"\n")
  path = '/content/drive/MyDrive/masked/'+str(filename)+'/'
  for f in os.listdir(path):
      # print(f)
      img = cv2.imread(path+ str(f))
      img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
      resized_img = cv2.resize(img, (50, 50), interpolation=cv2.INTER_LINEAR)
      plt.figure(figsize=(2, 2))
      plt.imshow(img, interpolation='nearest')
      plt.grid(False)
      plt.xticks([])
      plt.yticks([])
      plt.show()
      flattened_img = resized_img.flatten()
      normalized_img = (flattened_img - flattened_img.min()) / (flattened_img.max() - flattened_img.min())
      normalized_img = normalized_img.reshape(1,50,50,3)
      x = normalized_img[0]
      pred_gender, pred_ethnicity, pred_age = model2.predict(x[np.newaxis])
      gender_acc = np.round(pred_gender)
      ethnicity_acc = np.mean(np.argmax(pred_ethnicity))
      age_mae = np.abs(pred_age)
      print("Test gender accuracy for single image:", gender_acc)
      print("Test ethnicity accuracy for single image:", ethnicity_acc)
      print("Test age MAE for single image:", age_mae)
 33_0_0_thomas_muller_wearing_mask 

1/1 [==============================] - 0s 26ms/step
Test gender accuracy for single image: [[0.]]
Test ethnicity accuracy for single image: 0.0
Test age MAE for single image: [[30.417236]]

 26_1_2_naomi_osaka_wearing_mask 

1/1 [==============================] - 0s 18ms/step
Test gender accuracy for single image: [[1.]]
Test ethnicity accuracy for single image: 1.0
Test age MAE for single image: [[25.811928]]

 52_0_0_justin_trudeau_wearing_mask 

1/1 [==============================] - 0s 25ms/step
Test gender accuracy for single image: [[1.]]
Test ethnicity accuracy for single image: 0.0
Test age MAE for single image: [[44.844246]]

 60_1_3_kamala_haris_wearing_mask 

1/1 [==============================] - 0s 25ms/step
Test gender accuracy for single image: [[0.]]
Test ethnicity accuracy for single image: 0.0
Test age MAE for single image: [[47.14628]]

 40_0_1_lewis_hamilton_wearing_mask 

1/1 [==============================] - 0s 25ms/step
Test gender accuracy for single image: [[0.]]
Test ethnicity accuracy for single image: 1.0
Test age MAE for single image: [[56.405163]]

 64_0_0_gary_peters_wearing_sunglasses 

1/1 [==============================] - 0s 28ms/step
Test gender accuracy for single image: [[1.]]
Test ethnicity accuracy for single image: 0.0
Test age MAE for single image: [[72.6852]]

 60_0_0_jean_castex_wearing_mask 

1/1 [==============================] - 0s 30ms/step
Test gender accuracy for single image: [[0.]]
Test ethnicity accuracy for single image: 0.0
Test age MAE for single image: [[61.82514]]

 23_0_1_jayson_tatum_wearing_mask 

1/1 [==============================] - 0s 28ms/step
Test gender accuracy for single image: [[0.]]
Test ethnicity accuracy for single image: 0.0
Test age MAE for single image: [[47.97095]]

 46_0_0_immanuel_macron_wearing_mask 

1/1 [==============================] - 0s 24ms/step
Test gender accuracy for single image: [[1.]]
Test ethnicity accuracy for single image: 0.0
Test age MAE for single image: [[68.02141]]

 26_0_0_george_russell_wearing_mask 

1/1 [==============================] - 0s 25ms/step
Test gender accuracy for single image: [[0.]]
Test ethnicity accuracy for single image: 0.0
Test age MAE for single image: [[56.16721]]

 73_1_1_debbie_stabenow_wearing_mask 

1/1 [==============================] - 0s 24ms/step
Test gender accuracy for single image: [[0.]]
Test ethnicity accuracy for single image: 0.0
Test age MAE for single image: [[66.48702]]

 33_0_0_diego_schwartzman_wearing_mask 

1/1 [==============================] - 0s 25ms/step
Test gender accuracy for single image: [[0.]]
Test ethnicity accuracy for single image: 0.0
Test age MAE for single image: [[44.88432]]

 67_0_0_bill_gates_wearing_mask 

1/1 [==============================] - 0s 20ms/step
Test gender accuracy for single image: [[0.]]
Test ethnicity accuracy for single image: 0.0
Test age MAE for single image: [[79.24071]]

 45_0_0_bruno_fernandes_wearing_mask 

1/1 [==============================] - 0s 24ms/step
Test gender accuracy for single image: [[0.]]
Test ethnicity accuracy for single image: 0.0
Test age MAE for single image: [[49.604534]]

 30_0_4_carlos_sainz_wearing_mask 

1/1 [==============================] - 0s 19ms/step
Test gender accuracy for single image: [[1.]]
Test ethnicity accuracy for single image: 2.0
Test age MAE for single image: [[33.714268]]

 25_0_0_charles_leclerc_wearing_mask 

1/1 [==============================] - 0s 22ms/step
Test gender accuracy for single image: [[0.]]
Test ethnicity accuracy for single image: 0.0
Test age MAE for single image: [[54.74074]]

 71_0_0_chuck_schumer_wearing_mask 

1/1 [==============================] - 0s 22ms/step
Test gender accuracy for single image: [[0.]]
Test ethnicity accuracy for single image: 0.0
Test age MAE for single image: [[64.02006]]

 36_0_0_alexander_zverev_wearing_mask 

1/1 [==============================] - 0s 28ms/step
Test gender accuracy for single image: [[0.]]
Test ethnicity accuracy for single image: 3.0
Test age MAE for single image: [[30.766315]]

 45_1_0_evangeline_lilly_wearing_sunglasses 

1/1 [==============================] - 0s 26ms/step
Test gender accuracy for single image: [[0.]]
Test ethnicity accuracy for single image: 0.0
Test age MAE for single image: [[53.621925]]

 38_1_0_emily_blunt_wearing_sunglasses 

1/1 [==============================] - 0s 27ms/step
Test gender accuracy for single image: [[0.]]
Test ethnicity accuracy for single image: 0.0
Test age MAE for single image: [[74.77888]]

 32_0_0_chris_hemsworth_wearing_sunglasses 

1/1 [==============================] - 0s 31ms/step
Test gender accuracy for single image: [[0.]]
Test ethnicity accuracy for single image: 0.0
Test age MAE for single image: [[48.000103]]

 50_0_0_christian_bale_wearing_sunglasses 

1/1 [==============================] - 0s 19ms/step
Test gender accuracy for single image: [[0.]]
Test ethnicity accuracy for single image: 0.0
Test age MAE for single image: [[50.35259]]

 68_0_0_bruce_willis_wearing_sunglasses 

1/1 [==============================] - 0s 25ms/step
Test gender accuracy for single image: [[0.]]
Test ethnicity accuracy for single image: 0.0
Test age MAE for single image: [[73.806305]]

 59_0_0_gary_peters_mask 

1/1 [==============================] - 0s 37ms/step
Test gender accuracy for single image: [[0.]]
Test ethnicity accuracy for single image: 0.0
Test age MAE for single image: [[53.06415]]

 48_0_0_adrien_brody_wearing_sunglasses 

1/1 [==============================] - 0s 25ms/step
Test gender accuracy for single image: [[0.]]
Test ethnicity accuracy for single image: 0.0
Test age MAE for single image: [[58.980083]]

 44_0_1_anthony_mackie_wearing_sunglasses 

1/1 [==============================] - 0s 27ms/step
Test gender accuracy for single image: [[0.]]
Test ethnicity accuracy for single image: 0.0
Test age MAE for single image: [[59.986362]]

 41_0_0_benedict_cumberbatch_wearing_sunglasses 

1/1 [==============================] - 0s 25ms/step
Test gender accuracy for single image: [[0.]]
Test ethnicity accuracy for single image: 0.0
Test age MAE for single image: [[56.47261]]

 75_0_0_arnold_schwarzenegger_wearing_sunglasses 

1/1 [==============================] - 0s 25ms/step
Test gender accuracy for single image: [[0.]]
Test ethnicity accuracy for single image: 0.0
Test age MAE for single image: [[35.052162]]

 52_0_0_brad_pitt_wearing_sunglasses 

1/1 [==============================] - 0s 19ms/step
Test gender accuracy for single image: [[1.]]
Test ethnicity accuracy for single image: 1.0
Test age MAE for single image: [[25.837273]]

 30_1_0_brie_larson_wearing_sunglasses 

1/1 [==============================] - 0s 19ms/step
Test gender accuracy for single image: [[0.]]
Test ethnicity accuracy for single image: 0.0
Test age MAE for single image: [[40.502937]]

 63_0_0_ben_mendelsohn_wearing_sunglasses 

1/1 [==============================] - 0s 28ms/step
Test gender accuracy for single image: [[0.]]
Test ethnicity accuracy for single image: 0.0
Test age MAE for single image: [[64.12533]]

 72_0_0_patty_murray_wearing_mask 

1/1 [==============================] - 0s 25ms/step
Test gender accuracy for single image: [[0.]]
Test ethnicity accuracy for single image: 0.0
Test age MAE for single image: [[39.042404]]

VIZUALIZATION AND EVALUATION¶

In [ ]:
pr=np.round(pred[0])
np.round(pred[0], 2)
Y_pred_gender = pred[0]
print(Y_pred_gender,y_gender_test)
Y_true_gender = y_gender_test
cm = confusion_matrix(Y_true_gender,pr)
sns.heatmap(cm, annot=True, fmt='d', cbar=False, cmap='Greens');
[[0.1486084 ]
 [0.9892729 ]
 [0.22728018]
 ...
 [0.01487922]
 [0.00660977]
 [0.34393966]] [0 1 0 ... 0 0 1]
In [ ]:
from sklearn.metrics import classification_report, confusion_matrix
Y_pred_Ethn = np.argmax(pred[1],axis=1)
Y_true_ethnicity = np.argmax(y_ethnicity_test,axis = 1)
print('Confusion Matrix')
print(confusion_matrix(Y_true_ethnicity, Y_pred_Ethn))
print('Classification Report')
target_names = ['white', 'black','asian','indian','others']
print(classification_report(Y_true_ethnicity, Y_pred_Ethn, target_names=target_names))
Confusion Matrix
[[1415  161   41  119    0]
 [ 254  460   16   83    0]
 [ 280   84  107   54    0]
 [ 347  176   12  172    0]
 [ 172   46   16   59    0]]
Classification Report
              precision    recall  f1-score   support

       white       0.57      0.82      0.67      1736
       black       0.50      0.57      0.53       813
       asian       0.56      0.20      0.30       525
      indian       0.35      0.24      0.29       707
      others       0.00      0.00      0.00       293

    accuracy                           0.53      4074
   macro avg       0.40      0.37      0.36      4074
weighted avg       0.48      0.53      0.48      4074

/usr/local/lib/python3.10/dist-packages/sklearn/metrics/_classification.py:1344: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples. Use `zero_division` parameter to control this behavior.
  _warn_prf(average, modifier, msg_start, len(result))
/usr/local/lib/python3.10/dist-packages/sklearn/metrics/_classification.py:1344: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples. Use `zero_division` parameter to control this behavior.
  _warn_prf(average, modifier, msg_start, len(result))
/usr/local/lib/python3.10/dist-packages/sklearn/metrics/_classification.py:1344: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples. Use `zero_division` parameter to control this behavior.
  _warn_prf(average, modifier, msg_start, len(result))
In [ ]:
#confusion matrix for ethnicity
cm = confusion_matrix(Y_true_ethnicity,Y_pred_Ethn)
print("Confusion Matrix")
#np.fill_diagonal(cm, 0) #Fill diagonal with 0 for better visualisation
sns.heatmap(cm, annot=True, fmt='d', cbar=False, cmap='Greens');
Confusion Matrix
In [ ]:
#Plotting predicted age versus the actual labels of test, Ideally it should have been a staright line
plt.figure(figsize=(8, 5))
plt.scatter(pred[2], y_age_test);